home
***
CD-ROM
|
disk
|
FTP
|
other
***
search
/
Tech Arsenal 1
/
Tech Arsenal (Arsenal Computer).ISO
/
tek-04
/
aie8911.zip
/
NEXT_TOK.ARI
< prev
next >
Wrap
Text File
|
1989-05-31
|
6KB
|
249 lines
%%%%%%%%%% end prepcomp generated declarations %%%%%%%%%%%%%%%%%%%%
% :- module next_tok.
% next_tok_trace/0 is an external, interpreted flag predicate: when it is
% defined and succeeds, tracing for this module is enabled.
:- extrn next_tok_trace / 0 : interp.
% next_tok_trace(+X): print trace message X via trace_message/1 when the
% next_tok_trace/0 flag succeeds; otherwise succeed silently (second clause).
next_tok_trace( X) :-
call( next_tok_trace),
!,
trace_message(X).
next_tok_trace( _).
%%%%%%%%%%%%%%%%%%%%% tokens_between_sep %%%%%%%%%%%%%%%%%%%%%%%%%%%
% tokens_between_sep(?Tokens, +In, ?Out):
%   split In (a char list, string, or atom) into Tokens, where each token
%   is whatever text lies between separators/commas (see
%   next_token_between_sep/4 below).
% First clause is a trace hook: it reports the input when tracing is
% enabled, then fails so the real clauses below take over.
tokens_between_sep( _ , In, _ ) :-
next_tok_trace([$e tokens_between_sep , In = $, In]),
fail.
% Empty input: no tokens, no leftover.
tokens_between_sep( [] , [], [] ) :- !.
% Accept a string or atom by converting it to a char list first.
tokens_between_sep(Tokens , In, Out ) :-
( string( In) ; atom(In)),
!,
list_text( List, In),
tokens_between_sep( Tokens , List, Out ) .
% DCG clause: take one token, then recurse on the remaining input.
tokens_between_sep( [H|T] ) -->
next_token_between_sep( H ),
!,
tokens_between_sep(T ).
% Catch-all: no further token can be taken; return the empty list.
% NOTE(review): this clause leaves the output argument unbound instead of
% closing the difference list (compare tokens//2 at the end of the file) -
% confirm that callers never rely on the leftover here.
tokens_between_sep( [], _, _) :-!.
%%%%%%%%%%%%%%%% next_token_between_sep %%%%%%%%%%%%%%%%%%%%%%%%%%%
/* This returns the next token, where a token is whatever is between
separators or commas.
CALL: next_token_between_sep(Token,
Input_list_of_chars,
Left_over_tail_of_input)
*/
:- mode next_token_between_sep( ? , +, ?).
% Trace hook: report the input when tracing is enabled, then fail through
% to the working clauses.
next_token_between_sep( _ , In, _ ) :-
next_tok_trace([$e next_token_between_sep , In = $, In]),
fail.
% No token can be extracted from empty input.
next_token_between_sep( _ , [], _ ) :-
!,
fail.
% Accept a string or atom by converting it to a char list first.
next_token_between_sep(Token , In, Out ) :-
( string( In) ; atom(In)),
!,
list_text( List, In),
next_token_between_sep( Token , List, Out ) .
% Skip any leading separators/commas before the token proper.
next_token_between_sep(Token) -->
separator_or_comma(_),
!,
next_token_between_sep(Token ).
% Each parenthesis is a token in its own right, returned as a string.
next_token_between_sep($($) -->
[`(],
!.
next_token_between_sep($)$) -->
[`)],
!.
% General case: take one char, then collect the rest of the token with
% between_sep_tail//1 and pack the chars into a string Token.
next_token_between_sep(Token) -->
[X],
!,
between_sep_tail(Y),
{list_text( [X|Y], Token)}.
% between_sep_tail(?Chars): DCG - collect the remaining chars of the
% current token. Stops at (and consumes) a separator/comma; stops
% WITHOUT consuming when a parenthesis or the end of input is next.
% Trace hook: report the input when tracing is enabled, then fail through.
between_sep_tail( _ , In, _ ) :-
next_tok_trace([$e between_sep_tail , In = $, In]),
fail.
% A separator or comma ends the token (the separator itself is consumed).
between_sep_tail( [] ) -->
separator_or_comma(_),
!.
% A parenthesis ends the token but stays in the input for the caller
% (explicit 3-arg clause so the head char is peeked, not consumed).
between_sep_tail( [], Rest, Rest ) :-
Rest = [H|_],
( H == `(; H==`) ),
!.
% Any other char belongs to the token; keep collecting.
between_sep_tail( [H | T ] ) -->
[ H ],
!,
between_sep_tail( T ).
% End of input also ends the token.
between_sep_tail( [ ] ) --> !.
% separator_or_comma(?X): DCG - consume one char X that is either a
% separator (separator//1 is loaded from a separate file, see the
% reconsult note above) or a comma; commit to the first match.
separator_or_comma(X) -->
    ( separator(X)
    ; comma(X)
    ),
    !.
% comma(?Ch): DCG - consume a single comma character.
comma(Ch) --> [Ch], {Ch = `,}.
%%%%%%%%%%%%%%%%%%%% next_token %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
/*
next_token(Token,
Input_list_of_chars,
Left_over_tail_of_input)
returns the next Token in Input_list_of_chars.
In more detail,
next_token(Token) --> returns a single typed token, which is either
digits(D) where D is a string of digits
alphanum(A) where A is an alphanum string
eof($EOF$) when an end of file char is encountered
currently ASCII 26
char(C) where C is a 1-char string containing a
non-separator non-alphanum char
The 4-argument versions are called like this:
next_token(Token,
Special_token,
Input_list_of_chars,
Left_over_tail_of_input)
Special_token is the name of a 3-arg. predicate such as is defined by
DCG rules. It returns special tokens.
For example, we might define
a DCG rule to identify marker strings like this: ****************.
The definition of a DCG to do this is
*/
% marker_string(?T): example special-token DCG for the Rulepred hook of
% next_token/4. Recognizes a run of one or more '*' chars and returns
% marker(Token) with Token the run packed into a string.
marker_string( marker(Token ) ) --> [`*], !, marker_tail(T),
{list_text( [`* |T], Token)}.
% marker_tail(?Chars): collect any further '*' chars (possibly none).
marker_tail([`*|T]) --> [`*], !, marker_tail(T).
marker_tail( []) --> !.
/*
Calling
list_text( Input_list_of_chars, $123.abc*****$),
tokens(Token,
marker,
Input_list_of_chars,
Left_over_tail_of_input)
instantiates
Token = [ digits($123$), char($.$), alphanum($abc$), marker($*****$)]
FILES TO RECONSULT IN ADDITION TO THIS ONE:
:- reconsult($\lib\strings\IS_CHAR$).
:- reconsult($\lib\strings\scan\sep$).
EXAMPLE:
*/
% testpred: interactive example. Reads a line from the user, shows the
% first token found by next_token/4 (with marker_string//1 as the special
% token rule), then tokenizes the whole line with tokens/4 (the [] final
% argument requires all input to be consumed).
testpred :-
write($Enter a line: $),
read_line(0,Response),
list_text(List, Response),
next_token(Token,
marker_string,
List,
Left_over_tail_of_input),
write($ Token = $),write(Token),nl,
write($ Left_over_tail_of_input = $),
write(Left_over_tail_of_input),nl,
% now call tokens
tokens(Tokens,
marker_string,
List,
[]),
write($ Tokens = $),write(Tokens),nl.
%%%%%%%%%%%%%%%%% code starts here %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% next_token(?Token): 3-arg form with no special token rule (fail/3 is
% never a current predicate, so the Rulepred clause below is skipped).
next_token(Token) --> next_token(Token, fail ).
% next_token(?Structure, +Rulepred, +Chars, ?Left_over):
% give the user-supplied Rulepred (a DCG-defined pred/3, e.g.
% marker_string above) first chance at recognizing a special token;
% only tried when Rulepred/3 actually exists.
next_token(Structure, Rulepred, Chars, Left_over) :-
current_predicate(Rulepred / 3),
Term =.. [Rulepred, Structure, Chars, Left_over],
call(Term),!.
% Skip leading separators before the token proper.
next_token(Token, Rulepred) --> separator(_),!,
next_token(Token, Rulepred).
% A letter starts an alphanum(Token) run.
next_token(alphanum(Token),_) --> alpha(A), !, alphanum_tail(T),
{list_text([A|T],Token)}.
% A digit starts a digits(Token) run.
next_token(digits(Token),_) --> digit(A), !, digit_tail(T),
{list_text([A|T],Token)}.
% ASCII 26 (Ctrl-Z) is treated as the end-of-file token.
next_token(eof($EOF$),_) --> [26], !.
% Any other single char becomes char(Token), Token a 1-char string.
next_token(char(Token),_) --> [T], !,
{list_text([T],Token)}, !.
% alphanum_tail(?Chars): collect zero or more further alphanumeric chars.
alphanum_tail([H|T]) --> alphanum(H),
alphanum_tail(T),!.
alphanum_tail([]) -->!.
% digit_tail(?Chars): collect zero or more further digit chars.
digit_tail([H|T]) --> digit(H),
digit_tail(T),!.
digit_tail([]) -->!.
% Single-char classifier DCGs over is_digit/1, is_letter/1, is_alphanum/1
% (loaded from the strings library, see the reconsult note above).
digit(A) --> [A], {is_digit(A)}.
alpha(A) --> [A], {is_letter(A)}.
alphanum(H) --> [H],{is_alphanum( H ) }.
%%%%%%%%%%%%%%%%%%%%%%%%% tokens %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
/* finds all the tokens in a list of chars */
% tokens(?L): 3-arg form with no special token rule.
tokens(L) --> tokens(L, fail).
% tokens(?List, +Rulepred): repeatedly take tokens with next_token//2
% until no more can be taken.
tokens([H|T], Rulepred) --> next_token( H, Rulepred ), !,
tokens(T, Rulepred).
% No token available: empty list; leftover input passes through unchanged.
tokens([], _ ) --> !.
%%%%%%%%%%%%%%%% end next_token %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%